Table of Contents

SSHA AVISO#

# Load the AVISO/DUACS gridded sea-level product (SLA + geostrophic velocities).
# `autoclose=True` removed: it is deprecated and ignored by xarray (see the
# FutureWarnings later in this notebook); use
# xr.set_options(file_cache_maxsize=...) to limit open file handles instead.
ssha = xr.open_dataset('../datasets/ssh/dataset-duacs-rep-global-merged-allsat-phy-l4-v3_1522711420825.nc')

# Kinetic-energy proxies from the geostrophic velocity fields:
#   EKE from the anomaly components (ugosa, vgosa),
#   MKE from the absolute components (ugos, vgos).
# np.hypot(u, v) = sqrt(u**2 + v**2) without intermediate overflow.
# NOTE(review): these are speeds, not 0.5*(u**2 + v**2) energies —
# the 'EKE'/'MKE' labels are kept from the original; confirm intent.
ssha['EKE'] = np.hypot(ssha.ugosa, ssha.vgosa)
ssha['MKE'] = np.hypot(ssha.ugos, ssha.vgos)
ssha
<xarray.Dataset>
Dimensions:    (latitude: 93, longitude: 73, time: 4384)
Coordinates:
  * time       (time) datetime64[ns] 2005-01-01 2005-01-02 ... 2017-01-01
  * longitude  (longitude) float32 77.875 78.125 78.375 ... 95.375 95.625 95.875
  * latitude   (latitude) float32 1.875 2.125 2.375 ... 24.375 24.625 24.875
Data variables:
    vgosa      (time, latitude, longitude) float64 ...
    vgos       (time, latitude, longitude) float64 ...
    sla        (time, latitude, longitude) float64 ...
    ugosa      (time, latitude, longitude) float64 ...
    ugos       (time, latitude, longitude) float64 ...
    EKE        (time, latitude, longitude) float64 0.2249 0.1963 ... nan nan
    MKE        (time, latitude, longitude) float64 0.3003 0.2232 ... nan nan
Attributes:
    title:            DT all-sat-merged Global Ocean Gridded SSALTO/DUACS Sea...
    institution:      CLS, CNES
    references:       http://marine.copernicus.eu
    source:           Altimetry measurements
    Conventions:      CF-1.0
    history:          Data extracted from dataset http://misgw-sltac.vlandata...
    time_min:         20089.0
    time_max:         24472.0
    julian_day_unit:  days since 1950-01-01 00:00:00
    latitude_min:     1.875
    latitude_max:     24.875
    longitude_min:    77.875
    longitude_max:    95.875
# Interactive plotting: HoloViews, with GeoViews for geographic data.
import holoviews as hv
import geoviews as gv

# Register both rendering backends; 'matplotlib' (listed first) is the default.
hv.notebook_extension('matplotlib', 'bokeh')

import warnings
/home/deepak/anaconda3/lib/python3.6/site-packages/numba/npyufunc/dufunc.py:88: DeprecationWarning: inspect.getargspec() is deprecated, use inspect.signature() or inspect.getfullargspec()
  super(DUFunc, self).__init__(dispatcher, **kws)
/home/deepak/anaconda3/lib/python3.6/site-packages/numba/npyufunc/dufunc.py:88: DeprecationWarning: inspect.getargspec() is deprecated, use inspect.signature() or inspect.getfullargspec()
  super(DUFunc, self).__init__(dispatcher, **kws)
/home/deepak/anaconda3/lib/python3.6/site-packages/numba/npyufunc/dufunc.py:88: DeprecationWarning: inspect.getargspec() is deprecated, use inspect.signature() or inspect.getfullargspec()
  super(DUFunc, self).__init__(dispatcher, **kws)
/home/deepak/anaconda3/lib/python3.6/site-packages/numba/npyufunc/dufunc.py:88: DeprecationWarning: inspect.getargspec() is deprecated, use inspect.signature() or inspect.getfullargspec()
  super(DUFunc, self).__init__(dispatcher, **kws)
/home/deepak/anaconda3/lib/python3.6/importlib/_bootstrap.py:219: ImportWarning: can't resolve package from __spec__ or __package__, falling back on __name__ and __path__
  return f(*args, **kwds)
/home/deepak/anaconda3/lib/python3.6/importlib/_bootstrap.py:219: ImportWarning: can't resolve package from __spec__ or __package__, falling back on __name__ and __path__
  return f(*args, **kwds)
/home/deepak/anaconda3/lib/python3.6/importlib/_bootstrap.py:219: RuntimeWarning: numpy.dtype size changed, may indicate binary incompatibility. Expected 96, got 88
  return f(*args, **kwds)
/home/deepak/anaconda3/lib/python3.6/site-packages/nbconvert/exporters/exporter_locator.py:28: DeprecationWarning: `nbconvert.exporters.exporter_locator` is deprecated in favor of `nbconvert.exporters.base` since nbconvert 5.0.
  DeprecationWarning)
/home/deepak/anaconda3/lib/python3.6/site-packages/nbconvert/preprocessors/regexremove.py:41: DeprecationWarning: Traits should be given as instances, not types (for example, `Int()`, not `Int`). Passing types is deprecated in traitlets 4.1.
  patterns = List(Unicode, default_value=[r'\Z']).tag(config=True)
/home/deepak/anaconda3/lib/python3.6/site-packages/traitlets/traitlets.py:2367: DeprecationWarning: Traits should be given as instances, not types (for example, `Int()`, not `Int`). Passing types is deprecated in traitlets 4.1.
  super(Set, self).__init__(trait, default_value, minlen, maxlen, **kwargs)
%%output holomap='scrubber'
%%opts Image style(cmap='Reds') plot[colorbar=True]

# Silence the deprecation noise shown during imports above.
warnings.simplefilter('ignore')
# Animated (scrubber) lon/lat map of daily SLA for 2014.
gvds = gv.Dataset(ssha.sla.sel(time='2014'))
gvds.to(hv.Image, ['longitude', 'latitude'])
%matplotlib inline

# Time series of SLA (top) and EKE (bottom) at two latitudes along 90E.
# NOTE(review): `plt` is assumed imported earlier in the session — not
# visible in this notebook; confirm the import cell exists.
f, ax = plt.subplots(2, 1, sharex=True)

# Nearest-neighbour selection at (12N, 90E) and (15N, 90E).
region = dict(latitude=[12, 15], longitude=90, method='nearest')

ssha.sla.sel(**region).plot.line(hue='latitude', ax=ax[0])
ssha.EKE.sel(**region).plot.line(hue='latitude', ax=ax[1])
[<matplotlib.lines.Line2D at 0x7f9b64bdcf60>,
 <matplotlib.lines.Line2D at 0x7f9b64be44e0>]
_images/sat-data_5_1.png
# Load merged hourly turbulence dataset (T, S, KT, Jq, epsilon, ... on
# depth levels — see the repr below).
# `autoclose=True` removed: deprecated and ignored by xarray (see the
# FutureWarnings elsewhere in this notebook), so behaviour is unchanged.
turb = xr.open_dataset('./bay_merged_hourly.nc')
turb
<xarray.Dataset>
Dimensions:     (depth: 12, lat: 5, lon: 4, time: 19056)
Coordinates:
  * time        (time) datetime64[ns] 2013-11-29 2013-11-29T01:00:00 ...
  * depth       (depth) float64 15.0 28.0 30.0 45.0 48.0 55.0 56.0 69.0 75.0 ...
  * lat         (lat) float64 5.0 6.5 8.0 12.0 15.0
  * lon         (lon) float64 85.5 87.0 88.5 90.0
Data variables:
    mld         (time, lat, lon) float64 ...
    ild         (time, lat, lon) float64 ...
    z           (time, lat, lon, depth) float64 ...
    ρ           (time, lat, lon, depth) float64 ...
    S           (time, lat, lon, depth) float64 ...
    T           (time, lat, lon, depth) float64 ...
    KT          (time, lat, lon, depth) float64 ...
    mean_depth  (time, lat, lon, depth) float64 ...
    Jq          (time, lat, lon, depth) float64 ...
    chi-t       (time, lat, lon, depth) float64 ...
    epsilon     (time, lat, lon, depth) float64 ...
%matplotlib inline

# Compare mooring mixing (KT) against satellite EKE at one location.
lat = 8; lon=85.5;

f, ax = plt.subplots(2,1, sharex=True)
# Daily-mean KT profiles at the nearest mooring; drop depths that are all-NaN.
(turb.KT.sel(lon=lon, lat=lat, method='nearest')
 .dropna(dim='depth', how='all')
 .resample(time='D').mean(dim='time')
 .plot.line(x='time', ax=ax[0]))
ax[0].set_yscale('log')
ax[0].set_ylim([1e-7, 1e-2])

# SSH EKE at the same point, restricted to the mooring deployment years.
(ssha.EKE.sel(longitude=lon, latitude=lat, method='nearest')
 .sel(time=slice('2014', '2015'))
 .plot(x='time', ax=ax[1]))
[<matplotlib.lines.Line2D at 0x7fd040f50080>]
_images/sat-data_7_1.png

Climatological cycle of SSH EKE#

# Let's look at seasonal cycle of SSH EKE in the Bay

# NOTE(review): imports mid-notebook — consider moving to the top import cell.
import sciviscolor as svc
import bay


# Climatological (all-years) monthly-mean EKE.
EKEclim = ssha.EKE.groupby('time.month').mean(dim='time')

fg = EKEclim.plot(x='longitude', y='latitude', col='month', col_wrap=3, 
                  add_colorbar=False, cmap=svc.cm.blue_orange_div)
# Overlay mooring locations on every panel.
fg.map(bay.plots.mark_moors)
<xarray.plot.facetgrid.FacetGrid at 0x7f7ace4b5b00>
_images/sat-data_9_1.png

climatological monthly std#

# Climatological monthly standard deviation of SSH EKE.
EKEclim = ssha.EKE.groupby('time.month').std(dim='time')

# The original used bare `cm` and `mark_moors`, both undefined at this point
# under Restart & Run All (NameError from hidden kernel state); use the same
# colormap and fully-qualified marker helper as the monthly-mean figure above.
fg = EKEclim.plot(x='longitude', y='latitude', col='month', col_wrap=3,
                  add_colorbar=False, cmap=svc.cm.blue_orange_div)
fg.map(bay.plots.mark_moors)
<xarray.plot.facetgrid.FacetGrid at 0x7fd033ddf780>
_images/sat-data_11_1.png

2014 monthly mean SSH EKE normalized by climatological monthly mean#

Looks like NRL3 (505, 511) is my best bet.

# Climatological monthly-mean EKE — the denominator of the normalization below
# (the original recomputed it inline and left this variable unused).
EKEclim = ssha.EKE.groupby('time.month').mean(dim='time')

# `sciviscolor` was imported as `svc` earlier; the bare module name would
# NameError on a fresh kernel. Absolute colormap path kept from the original.
cm = svc.make_cmap('/home/deepak/python/sciviscolor/KeyColormaps/green-1.xml')

# 2014 monthly-mean EKE normalized by the climatological monthly mean:
# values > 1 mark months in 2014 that were more energetic than usual.
fg = ((ssha.EKE.sel(time='2014')
       .groupby('time.month').mean(dim='time')
       / EKEclim)
      .plot(x='longitude', y='latitude', col='month', col_wrap=3,
            add_colorbar=False, cmap=cm))
fg.map(bay.plots.mark_moors)

plt.gcf().suptitle('2014 monthly mean SSH EKE normalized by climatological monthly mean', y=1.02)
plt.savefig('images/eke-norm-clim-2014.png', bbox_inches='tight')
_images/sat-data_13_0.png

2014 monthly std#

# 2014 monthly standard deviation of SLA.
# (Removed a stray `w` on its own line — a typo that raised NameError —
# and qualified `mark_moors`, which is otherwise undefined on a fresh kernel.)
# SSHvarclim = ssha.sla.groupby('time.month').var(dim='time')

fg = (ssha.sla.sel(time='2014')
      .groupby('time.month')
      .std(dim='time')
      .plot(x='longitude', y='latitude', col='month', col_wrap=3,
            add_colorbar=False, cmap=cm))
fg.map(bay.plots.mark_moors)
<xarray.plot.facetgrid.FacetGrid at 0x7fd8b2b99ac8>
_images/sat-data_15_1.png

Monthly mean SLA (SW + post-SW)#

# 2014 monthly-mean SLA over the Bay region, with coords renamed to the
# short lat/lon names expected by `bay.region`.
monmean = (ssha.sla.rename({'latitude': 'lat', 'longitude': 'lon'})
           .sel(**bay.region)
           .sel(time='2014')
           .groupby('time.month')
           .mean(dim='time'))

# Southwest monsoon + post-monsoon months only (June–November).
g = (monmean
     .sel(month=slice(6, 11))
     .plot(row='month', col_wrap=3, aspect=0.7, robust=True, add_colorbar=False))

g.map(bay.plots.mark_moors, color='k')

g.add_colorbar(label='2014 Monthly mean SLA [m]')

plt.savefig('../images/2014-monthly-mean-sla.png', bbox_inches='tight')
---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-1-636d4d1c1930> in <module>()
----> 1 monmean = (ssha.sla.rename({'latitude': 'lat', 'longitude': 'lon'})
      2            .sel(**bay.region)
      3            .sel(time='2014')
      4            .groupby('time.month')
      5            .mean(dim='time'))

NameError: name 'ssha' is not defined

NRL1#

import bay

# Daily SLA maps around the late-January 2014 period at NRL1.
g = (ssha.sla.sel(time=slice('2014-01-22', '2014-02-02'))
     .plot(robust=True, col='time', col_wrap=4, add_colorbar=False))
#g.add_colorbar()
g.map(bay.plots.mark_moors, color='k')
<xarray.plot.facetgrid.FacetGrid at 0x7fea30fafe10>
_images/sat-data_19_1.png
import bay

# MUR SST, subset to the Bay region.
# `autoclose=True` removed: deprecated and ignored by xarray (see the
# FutureWarnings elsewhere in this notebook), so behaviour is unchanged.
mur = xr.open_mfdataset('../datasets/mur/*.nc').sel(**bay.region)

# Same late-January 2014 window as the SLA maps above; pull into memory.
sliced = mur.analysed_sst.sel(time=slice('2014-01-22', '2014-02-02')).load()
sliced
<xarray.DataArray 'analysed_sst' (time: 12, lat: 1801, lon: 1401)>
array([[[300.083  , 300.054  , ..., 300.788  , 300.78198],
        [300.055  , 300.05   , ..., 300.75998, 300.74   ],
        ...,
        [      nan,       nan, ...,       nan,       nan],
        [      nan,       nan, ...,       nan,       nan]],

       [[300.057  , 300.031  , ..., 301.54898, 301.541  ],
        [300.038  , 300.03198, ..., 301.382  , 301.38   ],
        ...,
        [      nan,       nan, ...,       nan,       nan],
        [      nan,       nan, ...,       nan,       nan]],

       ...,

       [[300.059  , 300.07   , ..., 300.175  , 300.206  ],
        [300.044  , 300.053  , ..., 300.132  , 300.167  ],
        ...,
        [      nan,       nan, ...,       nan,       nan],
        [      nan,       nan, ...,       nan,       nan]],

       [[300.057  , 300.076  , ..., 300.66098, 300.735  ],
        [300.067  , 300.091  , ..., 300.552  , 300.651  ],
        ...,
        [      nan,       nan, ...,       nan,       nan],
        [      nan,       nan, ...,       nan,       nan]]], dtype=float32)
Coordinates:
  * lat      (lat) float32 6.0 6.01 6.02 6.03 6.04 6.05 6.06 6.07 6.08 6.09 ...
  * lon      (lon) float32 80.0 80.01 80.02 80.03 80.04 80.05 80.06 80.07 ...
  * time     (time) datetime64[ns] 2014-01-22T09:00:00 2014-01-23T09:00:00 ...
Attributes:
    long_name:      analysed sea surface temperature
    standard_name:  sea_surface_foundation_temperature
    units:          kelvin
    valid_min:      -32767
    valid_max:      32767
# SST anomaly relative to the window-mean, one panel per day.
g = ((sliced-sliced.mean(dim='time'))
     .plot(robust=True, col='time', col_wrap=4, add_colorbar=False))
#g.add_colorbar()
g.map(bay.plots.mark_moors, color='k')
<xarray.plot.facetgrid.FacetGrid at 0x7f11a6c348d0>
_images/sat-data_21_1.png

OSCAR#

import bay

# OSCAR surface currents, subset to the Bay of Bengal.
# NOTE: lat slice is (24, 4) because the OSCAR latitude axis is descending.
oscar = (xr.open_mfdataset('/home/deepak/datasets/oscar/oscar_vel*.nc',
                           drop_variables=['year'])
         .squeeze()
         .rename({'latitude': 'lat', 'longitude': 'lon'})
         .sel(lon=slice(80, 94), lat=slice(24, 4))).load()

# 2014 monthly-mean currents.
monmean = oscar.sel(time='2014').groupby('time.month').mean(dim='time')
/home/deepak/work/python/xarray/xarray/backends/api.py:611: FutureWarning: The autoclose argument is no longer used by xarray.open_dataset() and is now ignored; it will be removed in xarray v0.12. If necessary, you can control the maximum number of simultaneous open files with xarray.set_options(file_cache_maxsize=...).
  datasets = [open_(p, **open_kwargs) for p in paths]
/home/deepak/work/python/xarray/xarray/backends/api.py:611: FutureWarning: The autoclose argument is no longer used by xarray.open_dataset() and is now ignored; it will be removed in xarray v0.12. If necessary, you can control the maximum number of simultaneous open files with xarray.set_options(file_cache_maxsize=...).
  datasets = [open_(p, **open_kwargs) for p in paths]
/home/deepak/work/python/xarray/xarray/backends/api.py:611: FutureWarning: The autoclose argument is no longer used by xarray.open_dataset() and is now ignored; it will be removed in xarray v0.12. If necessary, you can control the maximum number of simultaneous open files with xarray.set_options(file_cache_maxsize=...).
  datasets = [open_(p, **open_kwargs) for p in paths]
/home/deepak/work/python/xarray/xarray/backends/api.py:611: FutureWarning: The autoclose argument is no longer used by xarray.open_dataset() and is now ignored; it will be removed in xarray v0.12. If necessary, you can control the maximum number of simultaneous open files with xarray.set_options(file_cache_maxsize=...).
  datasets = [open_(p, **open_kwargs) for p in paths]
/home/deepak/work/python/xarray/xarray/core/nanops.py:161: RuntimeWarning: Mean of empty slice
  return np.nanmean(a, axis=axis, dtype=dtype)
%matplotlib qt

# Interactive (Qt backend) quiver animation of OSCAR currents.
import animatplot as amp
import cartopy.crs as ccrs

ax = plt.axes(projection=ccrs.PlateCarree())
bay.plot_coastline(ax)
# One animation frame per OSCAR time step.
timeline = amp.Timeline(oscar.time)

bay.mark_moors(ax=ax, labels=False, color='k', markersize=8, zorder=-1)

# Broadcast 1-D coordinates to the 2-D grids that Quiver expects.
lon, lat = xr.broadcast(oscar.lon, oscar.lat)
# Transpose so the time axis matches t_axis; u/v are (time, lat, lon) → .T.
block1 = amp.blocks.Quiver(lon, lat, oscar.u.T, oscar.v.T, 
                           ax=ax, t_axis=oscar.u.T.get_axis_num('time'))
anim = amp.Animation([block1], timeline)

anim.toggle()
anim.timeline_slider(valfmt='%s')
# Export both GIF and MP4 versions.
anim.save_gif('../images/oscar.gif')
anim.save('oscar.mp4')
# Monthly-mean current vectors, one panel per month.
g = xr.plot.FacetGrid(monmean.u, col='month', col_wrap=4, aspect=0.6)

def quiver(month, u, v):
    # Plot one month's (u, v) vectors on the current facet axes.
    # NOTE(review): redefined again below for the EKE figure — deduplicate.
    plt.quiver(u.lon, u.lat, u.sel(month=month), v.sel(month=month))
   
g.map(quiver, 'month', u=monmean.u, v=monmean.v)
g.set_xlabels('Longitude')
g.set_ylabels('Latitude')
# Clamp every panel to the Bay of Bengal box.
g.map(lambda: plt.xlim([80, 92]))
g.map(lambda: plt.ylim([4, 23]))
# g.map(bay.plots.mark_moors, color='k')
#cb = g.add_colorbar(orientation='horizontal')
<xarray.plot.facetgrid.FacetGrid at 0x7f87b6ec6780>
_images/sat-data_27_1.png
# 2014 monthly-mean EKE (filled) with OSCAR current vectors overlaid.
# NOTE(review): `mpl` is assumed imported earlier in the session — confirm.
g = (ssha.EKE.sel(time='2014')
     .groupby('time.month').mean(dim='time')
     .plot(col='month', col_wrap=4, aspect=0.6,
           cmap=mpl.cm.Reds, cbar_kwargs={'orientation': 'horizontal', 'pad': 0.06}))

def quiver(month, u, v):
    # Plot one month's (u, v) vectors on the current facet axes
    # (duplicate of the definition in the previous cell).
    plt.quiver(u.lon, u.lat, u.sel(month=month), v.sel(month=month))
   
g.map(quiver, 'month', u=monmean.u, v=monmean.v)
g.map(bay.mark_moors, labels=False, markersize=6)
g.map(lambda: plt.xlim([80, 92]))
g.map(lambda: plt.ylim([4, 23]))

plt.savefig('../images/2014-monthly-mean-eke-oscar-vectors.png', bbox_inches='tight')
/home/deepak/work/python/xarray/xarray/core/nanops.py:162: RuntimeWarning: Mean of empty slice
  return np.nanmean(a, axis=axis, dtype=dtype)
_images/sat-data_28_1.png

SMAP SSS#

# Path to the SMAP salinity MATLAB file.
# NOTE(review): never read in any visible cell — either load it or drop this.
file = '../datasets/SMAP/SMAP2015.mat'

NIW input#

This estimate is not meaningful as computed: the wind-work product below uses OSCAR's slowly-varying surface currents, whereas a near-inertial wave input estimate needs the inertial-frequency current component.

# Reload OSCAR currents (without the um/vm variables) for the wind-work
# estimate; lat slice is (24, 4) because the OSCAR latitude axis descends.
# `autoclose=True` removed from all three open_mfdataset calls: deprecated
# and ignored by xarray (see FutureWarnings above), behaviour unchanged.
oscar = (xr.open_mfdataset('/home/deepak/datasets/oscar/oscar_vel*.nc',
                           drop_variables=['year'])
         .squeeze()
         .rename({'latitude': 'lat', 'longitude': 'lon'})
         .drop(['um', 'vm'])
         .sel(lon=slice(80, 94), lat=slice(24, 4))).load()

# TropFlux daily wind-stress components.
taux = xr.open_mfdataset('../datasets/tropflux/taux_tropflux_1d_*.nc')
tauy = xr.open_mfdataset('../datasets/tropflux/tauy_tropflux_1d_*.nc')

tau = (xr.merge([taux, tauy])
       .rename({'latitude': 'lat', 'longitude': 'lon'}))
import bay

# Put OSCAR currents on the TropFlux grid for 2014 so the product is aligned.
oscari = oscar.sel(time='2014').interp_like(tau.sel(time='2014'))
taui = tau.sel(time='2014')

# Wind work u·τ = u*taux + v*tauy over the Bay region.
# (Per the note above: OSCAR currents are not inertial-band currents, so this
# is only a rough placeholder for NIW energy input.)
utau = (oscari.u * taui.taux + oscari.v * taui.tauy).sel(**bay.region)
utau.attrs['long_name'] = 'NIW input uτ'
# Time series at the four mooring locations (order matches the legend below).
utau.sel(lon=88.5, lat=8, method='nearest').plot()
utau.sel(lon=87, lat=8, method='nearest').plot()
utau.sel(lon=85.5, lat=8, method='nearest').plot()
utau.sel(lon=85.5, lat=5, method='nearest').plot()

# Regional average for context.
utau.sel(**bay.ebob_region).mean(['lat', 'lon']).plot(color='k', lw=2)

plt.legend(['NRL5', 'NRL4', 'NRL3', 'NRL1', 'EBOB mean'])
/home/deepak/anaconda3/lib/python3.6/site-packages/dask/array/numpy_compat.py:28: RuntimeWarning: invalid value encountered in true_divide
  x = np.divide(x1, x2, out)
<matplotlib.legend.Legend at 0x7fd95702e240>
_images/sat-data_35_2.png
# Monthly-mean maps of the wind-work estimate for 2014.
utau.groupby('time.month').mean('time').plot(col='month', col_wrap=3, robust=True)
plt.suptitle('near inertial wave input uτ for 2014', y=1.05)
/home/deepak/anaconda3/lib/python3.6/site-packages/dask/array/numpy_compat.py:28: RuntimeWarning: invalid value encountered in true_divide
  x = np.divide(x1, x2, out)
Text(0.5, 1.05, 'near inertial wave input uτ for 2014')
_images/sat-data_36_2.png
# Drifter-based global climatology of near-inertial current characteristics
# (Chaigneau et al. 2008 — see dataset attributes), subset to the Bay region.
drifter = (xr.open_dataset('/home/deepak/datasets/Global_climatology_Inertial_Current_Characteristics.nc')
           .rename({'x': 'lon', 'y': 'lat'})  # single rename replaces the two-step x→Longitude→lon chain
           .set_coords(['lat', 'lon'])
           .sel(**bay.region))
# Label the four seasons 1–4 so `season` is a usable index coordinate.
drifter['season'] = np.array([1, 2, 3, 4])
drifter
<xarray.Dataset>
Dimensions:    (lat: 10, lon: 8, season: 4)
Coordinates:
  * lon        (lon) int16 80 82 84 86 88 90 92 94
  * lat        (lat) int16 5 7 9 11 13 15 17 19 21 23
  * season     (season) int64 1 2 3 4
Data variables:
    Vi         (lat, lon) float32 ...
    Ri         (lat, lon) float32 ...
    MLEi       (lat, lon) float32 ...
    Vi_seas    (lat, lon, season) float32 ...
    Ri_seas    (lat, lon, season) float32 ...
    MLEi_seas  (lat, lon, season) float32 ...
Attributes:
    TITLE:        Global Climatology of Near-Inertial Current Characteristics...
    PRODUCTION:   Alexis Chaigneau - Sept. 20, 2011 -
    DESCRIPTION:  Inertial current amplitudes and Mixed-layer energy related ...
    REFERENCE:    Chaigneau, A., O. Pizarro, and W. Rojas (2008), Global clim...
# Bay-averaged seasonal cycle of inertial current amplitude (Vi).
drifter.Vi_seas.mean(['lat', 'lon']).plot()
[<matplotlib.lines.Line2D at 0x7fd956ccc9b0>]
_images/sat-data_38_1.png
# Load the NRL5 mooring record (project helper; warns on large data gaps).
nrl5 = bay.read_nrl5()
/home/deepak/python/moor/moor.py:350: UserWarning: Found large gap. NaNing out...
  warnings.warn('Found large gap. NaNing out...')
import dcpy.ts

# Velocity at the fifth depth level (index 4).
vel = nrl5.vel.isel(depth=4)

# Near-inertial pass band: 0.6–1.5 × the local inertial frequency.
# Hoisted into one variable — the original repeated this expression for u and v.
# (The /24 presumably converts cycles/day to cycles/hour — TODO confirm the
# units of `nrl5.inertial` against dcpy.ts.BandPassButter's expectations.)
niband = [0.6 * nrl5.inertial / 24, nrl5.inertial * 1.5 / 24]

ui = dcpy.ts.BandPassButter(vel.u, freqs=niband, dim='time')
vi = dcpy.ts.BandPassButter(vel.v, freqs=niband, dim='time')

# dcpy.ts.PlotSpectrum(nrl5.vel.isel(depth=2).u, multitaper=True)
# dcpy.ts.PlotSpectrum(ui, multitaper=True, ax=plt.gca())
# dcpy.ts.linex(nrl5.inertial)
# plt.gca().set_ylim([1e-7, 1])

# NIW wind work τx·ui + τy·vi, with TropFlux stress interpolated onto the
# band-passed velocity time base.
niw_input = (nrl5.tropflux.taux.interp_like(ui)*ui 
             + nrl5.tropflux.tauy.interp_like(vi)*vi)
/home/deepak/python/dcpy/ts.py:881: RuntimeWarning: invalid value encountered in multiply
  out = np.empty(input.shape) * np.nan
<Figure size 780x660 with 0 Axes>
%matplotlib inline 

import xrscipy as xrsp

# Cumulative time integral of the NIW wind-work series (NaNs dropped first).
xrsp.integrate.cumtrapz(niw_input.dropna('time'), 'time').plot()
[<matplotlib.lines.Line2D at 0x7fd9559b6f28>]
_images/sat-data_41_1.png
# Sampling interval of the NIW-input series in days (1/24 ≈ 0.0417 → hourly).
time_step = niw_input.time[1] - niw_input.time[0]
(time_step / np.timedelta64(1, 'D')).values
array(0.04166667)

animatplot test#

import animatplot as amp
# TropFlux daily wind stress, Dec 2013 – Dec 2014, on a (time, lon, lat)
# layout ready for the quiver animation below.
tropflux = (xr.merge([xr.open_mfdataset('../datasets/tropflux/'+var+'_tropflux_1d_*.nc')
                      .sel(time=slice('2013-12-01', '2014-12-01'))
                      for var in ['taux', 'tauy']])
            .rename({'latitude': 'lat', 'longitude': 'lon'})
            .sel(lon=slice(70, 106), lat=slice(0, 26))
            .transpose('time', 'lon', 'lat'))

# Day-resolution timestamp strings (TropFlux times carry sub-second offsets).
# NOTE(review): `pd` assumed imported earlier in the session — confirm.
tvec = pd.to_datetime(tropflux.time.values).floor('D').values.astype('str')
tropflux
<xarray.Dataset>
Dimensions:  (lat: 26, lon: 36, time: 366)
Coordinates:
  * lon      (lon) float32 70.5 71.5 72.5 73.5 74.5 ... 102.5 103.5 104.5 105.5
  * lat      (lat) float32 0.5 1.5 2.5 3.5 4.5 5.5 ... 21.5 22.5 23.5 24.5 25.5
  * time     (time) datetime64[ns] 2013-12-01T11:59:59.999959808 ... 2014-12-01T11:59:59.999959808
Data variables:
    taux     (time, lon, lat) float32 dask.array<shape=(366, 36, 26), chunksize=(31, 36, 26)>
    tauy     (time, lon, lat) float32 dask.array<shape=(366, 36, 26), chunksize=(31, 36, 26)>
%matplotlib qt

import cartopy.crs as ccrs

# Interactive quiver animation of daily wind stress.
ax = plt.axes(projection=ccrs.PlateCarree())
bay.plot_coastline(ax)
timeline = amp.Timeline(tropflux.time)

# Broadcast 1-D coordinates to the 2-D grids that Quiver expects.
lon, lat = xr.broadcast(tropflux.lon, tropflux.lat)
block1 = amp.blocks.Quiver(lon, lat, tropflux.taux, tropflux.tauy, 
                           ax=ax, t_axis=tropflux.taux.get_axis_num('time'))
anim = amp.Animation([block1], timeline)

anim.toggle()
anim.timeline_slider(valfmt='%s')
# Load the satellite fields for the two-panel summary animation.
trmm = dcpy.oceans.read_trmm()
imerg = dcpy.oceans.read_imerg()
oscar = dcpy.oceans.read_oscar()

# NOTE(review): this re-binds `ssha` with coords renamed to lat/lon — cells
# above that expect `latitude`/`longitude` will break if re-run after this.
ssha = (xr.open_dataset('../datasets/ssh/dataset-duacs-rep-global-merged-allsat-phy-l4-v3_1522711420825.nc').
       rename({'latitude': 'lat', 'longitude': 'lon'}))
# TRMM precipitation, upsampled to a 300×300 grid for smoother frames.
rain = (trmm.precipitation.sel(time=slice('2013-12-01', '2014-12-01'))
        .interp(lat=np.linspace(trmm.lat.min(), trmm.lat.max(), 300),
                lon=np.linspace(trmm.lon.min(), trmm.lon.max(), 300)).load()
        .sel(lon=slice(78, 96), lat=slice(0, 26)))

# OSCAR currents on the rain time base (sorted so lat ascends before interp).
cur = (oscar.sel(time=slice('2013-12-01', '2014-12-01')).sortby('lat').load()
       .interp(time=rain.time)
      .sel(lon=slice(78, 96), lat=slice(0, 26)))

# SLA interpolated to the rain time base and the same 300×300 grid.
sla = (ssha.sla.sel(time=slice('2013-12-01', '2014-12-01')).load()
       .interp(time=rain.time,
               lat=np.linspace(ssha.lat.min(), ssha.lat.max(), 300),
               lon=np.linspace(ssha.lon.min(), ssha.lon.max(), 300))
      .sel(lon=slice(78, 96), lat=slice(0, 26)))
# rain = imerg.HQprecipitation
import cartopy.crs as ccrs

# Two-panel animation: (left) wind stress over precipitation,
# (right) OSCAR currents over SLA.
f, ax = plt.subplots(
    1, 2, subplot_kw={'projection': ccrs.PlateCarree()}, constrained_layout=True)

bay.plot_coastline(ax[0], facecolor='none', zorder=20)
bay.plot_coastline(ax[1], facecolor='none', zorder=20)
# All blocks animate on the rain time base (the other fields were
# interpolated to it in the previous cell).
timeline = amp.Timeline(rain.time)

block_precip = amp.blocks.Pcolormesh(rain.lon, rain.lat, rain,
                                     ax=ax[0], t_axis=rain.get_axis_num(
                                         'time'),
                                     cmap=dcpy.plots.rain_colormap(),
                                     vmin=0, vmax=90, zorder=10)

# Wind-stress vectors (note: tropflux is on its own daily time base).
lon, lat = xr.broadcast(tropflux.lon, tropflux.lat)
block_tau = amp.blocks.Quiver(lon, lat, tropflux.taux, tropflux.tauy,
                              ax=ax[0],
                              t_axis=tropflux.taux.get_axis_num('time'),
                              zorder=25)

block_title0 = amp.blocks.Title('Wind stress, precip | {time}',
                                time=list(rain.time.to_pandas().index.strftime('%Y-%b-%d')), ax=ax[0])

# Right panel: current vectors over SLA (NaNs masked for pcolormesh).
lon, lat = xr.broadcast(cur.lon, cur.lat)
block_oscar = amp.blocks.Quiver(lon, lat, cur.u.T, cur.v.T,
                                ax=ax[1], t_axis=cur.u.T.get_axis_num('time'),
                                zorder=20, scale=30)

block_sla = amp.blocks.Pcolormesh(sla.lon, sla.lat, np.ma.array(sla.values, mask=np.isnan(sla.values)),
                                  ax=ax[1], t_axis=sla.get_axis_num('time'),
                                  cmap=mpl.cm.RdBu_r)
block_title1 = amp.blocks.Title('OSCAR currents, SSHA | {time}',
                                time=list(rain.time.to_pandas().index.strftime('%Y-%b-%d')), ax=ax[1])

ax[0].set_extent([78, 96, 2, 24])
ax[1].set_extent([78, 96, 2, 24])
# plt.gcf().colorbar(block_precip.quad, ax=ax[0], extend='max')
anim = amp.Animation([block_precip, block_tau, block_title0,
                      block_oscar, block_sla, block_title1],
                     timeline)

# anim.toggle()
# anim.timeline_slider()
/home/deepak/anaconda3/lib/python3.6/site-packages/matplotlib/_constrained_layout.py:226: UserWarning: constrained_layout not applied.  At least one axes collapsed to zero width or height.
  warnings.warn('constrained_layout not applied.  At least '
# Render the two-panel animation to disk.
anim.save('../videos/summary.mp4')